In [474]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import pandas_datareader.data as web
import quandl
from datetime import datetime
import requests
import time
import warnings
warnings.filterwarnings('ignore')

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, LSTM, BatchNormalization
from tensorflow.keras.optimizers import SGD, Adam
from tensorflow.keras.callbacks import EarlyStopping, TensorBoard, ModelCheckpoint
from tensorflow.keras.utils import to_categorical
from sklearn.preprocessing import MinMaxScaler

import tensorflow as tf

import pygad
import pygad.kerasga
In [500]:
# Parameters

FUTURE = 5      # predict the WTI price FUTURE trading days ahead
ROLLING = 5     # short rolling-mean window
ROLLING1 = 10   # medium rolling-mean window
ROLLING2 = 15   # long rolling-mean window
In [501]:
# FRED feature series and sample window

start = datetime(2005, 1,1)
end = datetime(2022,12,1)


inputs = [ 'NASDAQCOM', 
          'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL', 'VIXCLS', 
          'DCOILWTICO']
fred_df = pd.DataFrame()

for i, name in enumerate(inputs):
    print(f'Series:{name}')
    if i == 0:
        fred_df = web.get_data_fred(name, start, end)
    else:
        _df = web.get_data_fred(name, start, end)
        fred_df = fred_df.join(_df, how = 'outer')
        
Series:NASDAQCOM
Series:DEXJPUS
Series:DEXUSEU
Series:DEXCHUS
Series:DEXUSAL
Series:VIXCLS
Series:DCOILWTICO
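As an aside, pandas_datareader's FRED reader also accepts a list of series codes, so the loop above can be collapsed into a single call. A minimal sketch, assuming the same inputs, start, and end defined above:

fred_df_alt = web.DataReader(inputs, 'fred', start, end)  # one column per series code
print(fred_df_alt.head())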
In [502]:
# Gold (LBMA via Quandl) and equity indices (NIFTY 50, Russell 2000, S&P 500 via Yahoo Finance)

gold = quandl.get("LBMA/GOLD", authtoken="YOUR_QUANDL_API_KEY")  # use your own key; avoid hard-coding real credentials
gold.index.name = 'DATE'
gold = gold[['USD (AM)']]
gold = gold.reset_index()

starttime = int(time.mktime(datetime(2005, 1,1).timetuple()))
endtime = int(time.mktime(datetime(2022, 12,1).timetuple()))
query_string = f'https://query1.finance.yahoo.com/v7/finance/download/^NSEI?period1={starttime}&period2={endtime}&interval=1d&events=history&includeAdjustedClose=true'
nifty = pd.read_csv(query_string)
nifty = nifty[['Date','Close' ]]
nifty.columns = ['DATE','NIFTY50' ]
nifty['DATE'] = pd.to_datetime(nifty['DATE'])

query_string = f'https://query1.finance.yahoo.com/v7/finance/download/^RUT?period1={starttime}&period2={endtime}&interval=1d&events=history&includeAdjustedClose=true'
russell = pd.read_csv(query_string)
russell = russell[['Date','Close' ]]
russell.columns = ['DATE','RUSSL' ]
russell['DATE'] = pd.to_datetime(russell['DATE'])

query_string = f'https://query1.finance.yahoo.com/v7/finance/download/^GSPC?period1={starttime}&period2={endtime}&interval=1d&events=history&includeAdjustedClose=true'
snp500 = pd.read_csv(query_string)
snp500 = snp500[['Date','Close' ]]
snp500.columns = ['DATE','SNP500' ]
snp500['DATE'] = pd.to_datetime(snp500['DATE'])


fred_df = fred_df.reset_index()
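One practical note: the query1.finance.yahoo.com CSV endpoint sometimes rejects requests without a browser-like User-Agent, in which case pd.read_csv receives an HTML error page instead of data. A hedged sketch of a small helper (fetch_yahoo_csv is hypothetical, not part of this pipeline) that downloads via requests, which is imported above but otherwise unused:

import io

def fetch_yahoo_csv(symbol, starttime, endtime):
    url = (f'https://query1.finance.yahoo.com/v7/finance/download/{symbol}'
           f'?period1={starttime}&period2={endtime}'
           '&interval=1d&events=history&includeAdjustedClose=true')
    resp = requests.get(url, headers={'User-Agent': 'Mozilla/5.0'})
    resp.raise_for_status()  # fail loudly on 403/404 instead of parsing an error page
    return pd.read_csv(io.StringIO(resp.text))

# e.g. russell = fetch_yahoo_csv('^RUT', starttime, endtime)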
In [503]:
fred_df = pd.merge(pd.merge(fred_df, gold, on = 'DATE'), nifty, on = 'DATE')
In [504]:
fred_df = pd.merge(fred_df, snp500, on = 'DATE')
In [505]:
fred_df.isna().sum()
Out[505]:
DATE           0
NASDAQCOM      0
DEXJPUS       39
DEXUSEU       39
DEXCHUS       39
DEXUSAL       39
VIXCLS         0
DCOILWTICO     7
USD (AM)       0
NIFTY50       10
SNP500         0
dtype: int64
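The FX series are each missing 39 daily observations, WTI 7, and NIFTY50 10, because trading holidays differ across markets. The notebook drops these rows further down; a forward-fill would keep them instead. A minimal sketch of that alternative (not what is done here):

fred_df_filled = fred_df.sort_values('DATE').ffill()  # carry the last quote across market holidays
print(fred_df_filled.isna().sum())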
In [506]:
fred_df
Out[506]:
DATE NASDAQCOM DEXJPUS DEXUSEU DEXCHUS DEXUSAL VIXCLS DCOILWTICO USD (AM) NIFTY50 SNP500
0 2007-09-17 2581.66 114.93 1.3860 7.5190 0.8328 26.48 80.55 710.25 4494.649902 1476.650024
1 2007-09-18 2651.66 115.75 1.3869 7.5220 0.8363 20.35 81.51 717.70 4546.200195 1519.780029
2 2007-09-19 2666.48 116.21 1.3950 7.5135 0.8555 20.03 81.99 723.75 4732.350098 1529.030029
3 2007-09-20 2654.29 114.18 1.4092 7.5055 0.8641 20.45 83.85 727.75 4747.549805 1518.750000
4 2007-09-21 2671.22 115.60 1.4076 7.5000 0.8650 19.00 83.38 735.35 4837.549805 1525.750000
... ... ... ... ... ... ... ... ... ... ... ...
3567 2022-11-23 11285.32 139.76 1.0364 7.1580 0.6707 20.35 77.93 1735.75 18267.250000 4027.260010
3568 2022-11-25 11226.36 139.21 1.0402 7.1642 0.6750 20.50 76.45 1753.55 18512.750000 4026.120117
3569 2022-11-28 11049.50 138.67 1.0386 7.2074 0.6694 22.21 77.10 1762.90 18562.750000 3963.939941
3570 2022-11-29 10983.78 138.28 1.0356 7.1568 0.6691 21.89 77.96 1755.35 18618.050781 3957.629883
3571 2022-11-30 11468.00 139.31 1.0323 7.0879 0.6717 20.58 80.48 1759.65 18758.349609 4080.110107

3572 rows × 11 columns

In [508]:
fred_df['MONTH'] = fred_df['DATE'].dt.month
fred_df['DAY'] = fred_df['DATE'].dt.day
In [100]:
#fred_df.to_csv('fred_df.csv')
In [12]:
#fred_df = pd.read_csv('fred_df.csv', index_col= 'Unnamed: 0')
In [13]:
# Rename columns ('USD (AM)' becomes 'GOLD')
fred_df.columns = ['DATE', 'NASDAQCOM', 'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL',
       'VIXCLS', 'DCOILWTICO', 'GOLD', 'NIFTY50', 'SNP500', 'MONTH',
       'DAY']
In [14]:
cols = ['NASDAQCOM', 'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL',
       'VIXCLS', 'GOLD', 'NIFTY50', 'SNP500', 'DCOILWTICO']
In [15]:
fred_df.dropna(inplace = True)
In [16]:
# Rolling-mean features; derive each column suffix from the window constants
for i in cols:
    fred_df[f'{i}_rolling_{ROLLING}'] = fred_df[i].rolling(window=ROLLING).mean()
    fred_df[f'{i}_rolling_{ROLLING1}'] = fred_df[i].rolling(window=ROLLING1).mean()
    fred_df[f'{i}_rolling_{ROLLING2}'] = fred_df[i].rolling(window=ROLLING2).mean()
In [17]:
fred_df.isna().sum()
Out[17]:
DATE                      0
NASDAQCOM                 0
DEXJPUS                   0
DEXUSEU                   0
DEXCHUS                   0
DEXUSAL                   0
VIXCLS                    0
DCOILWTICO                0
GOLD                      0
NIFTY50                   0
SNP500                    0
MONTH                     0
DAY                       0
NASDAQCOM_rolling_5       4
NASDAQCOM_rolling_10      9
NASDAQCOM_rolling_15     14
DEXJPUS_rolling_5         4
DEXJPUS_rolling_10        9
DEXJPUS_rolling_15       14
DEXUSEU_rolling_5         4
DEXUSEU_rolling_10        9
DEXUSEU_rolling_15       14
DEXCHUS_rolling_5         4
DEXCHUS_rolling_10        9
DEXCHUS_rolling_15       14
DEXUSAL_rolling_5         4
DEXUSAL_rolling_10        9
DEXUSAL_rolling_15       14
VIXCLS_rolling_5          4
VIXCLS_rolling_10         9
VIXCLS_rolling_15        14
GOLD_rolling_5            4
GOLD_rolling_10           9
GOLD_rolling_15          14
NIFTY50_rolling_5         4
NIFTY50_rolling_10        9
NIFTY50_rolling_15       14
SNP500_rolling_5          4
SNP500_rolling_10         9
SNP500_rolling_15        14
DCOILWTICO_rolling_5      4
DCOILWTICO_rolling_10     9
DCOILWTICO_rolling_15    14
dtype: int64
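The NaN counts above line up with the window sizes: rolling(window=w).mean() is undefined for the first w-1 rows, hence 4, 9, and 14 missing values for windows 5, 10, and 15. A toy illustration:

s = pd.Series(range(6), dtype=float)
print(s.rolling(window=5).mean())                 # first 4 entries are NaN
print(s.rolling(window=5, min_periods=1).mean())  # partial-window means instead of NaN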
In [18]:
fred_df.columns
Out[18]:
Index(['DATE', 'NASDAQCOM', 'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL',
       'VIXCLS', 'DCOILWTICO', 'GOLD', 'NIFTY50', 'SNP500', 'MONTH', 'DAY',
       'NASDAQCOM_rolling_5', 'NASDAQCOM_rolling_10', 'NASDAQCOM_rolling_15',
       'DEXJPUS_rolling_5', 'DEXJPUS_rolling_10', 'DEXJPUS_rolling_15',
       'DEXUSEU_rolling_5', 'DEXUSEU_rolling_10', 'DEXUSEU_rolling_15',
       'DEXCHUS_rolling_5', 'DEXCHUS_rolling_10', 'DEXCHUS_rolling_15',
       'DEXUSAL_rolling_5', 'DEXUSAL_rolling_10', 'DEXUSAL_rolling_15',
       'VIXCLS_rolling_5', 'VIXCLS_rolling_10', 'VIXCLS_rolling_15',
       'GOLD_rolling_5', 'GOLD_rolling_10', 'GOLD_rolling_15',
       'NIFTY50_rolling_5', 'NIFTY50_rolling_10', 'NIFTY50_rolling_15',
       'SNP500_rolling_5', 'SNP500_rolling_10', 'SNP500_rolling_15',
       'DCOILWTICO_rolling_5', 'DCOILWTICO_rolling_10',
       'DCOILWTICO_rolling_15'],
      dtype='object')
In [19]:
months = pd.get_dummies(fred_df['MONTH'], drop_first=True)  # January is the dropped baseline
months.columns = ['FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']

day = pd.get_dummies(fred_df['DAY'], drop_first=True)  # day-of-month dummies; day 1 dropped

fred_df = pd.concat([fred_df, months, day], axis=1)
In [20]:
fred_df['OIL_PRICE_FUTURE'] = fred_df['DCOILWTICO'].shift(-FUTURE)  # target: WTI price FUTURE trading days ahead
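shift(-FUTURE) pulls each row's target from FUTURE rows ahead, which is why the last FUTURE rows of OIL_PRICE_FUTURE show NaN in the table below. A toy illustration:

s = pd.Series([10., 11., 12., 13., 14., 15.])
print(s.shift(-5))  # row 0 gets the value from row 5; the last 5 rows become NaN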
In [21]:
fred_df
Out[21]:
DATE NASDAQCOM DEXJPUS DEXUSEU DEXCHUS DEXUSAL VIXCLS DCOILWTICO GOLD NIFTY50 ... 23 24 25 26 27 28 29 30 31 OIL_PRICE_FUTURE
0 2007-09-17 2581.66 114.93 1.3860 7.5190 0.8328 26.48 80.55 710.25 4494.649902 ... 0 0 0 0 0 0 0 0 0 82.51
1 2007-09-18 2651.66 115.75 1.3869 7.5220 0.8363 20.35 81.51 717.70 4546.200195 ... 0 0 0 0 0 0 0 0 0 81.20
2 2007-09-19 2666.48 116.21 1.3950 7.5135 0.8555 20.03 81.99 723.75 4732.350098 ... 0 0 0 0 0 0 0 0 0 80.31
3 2007-09-20 2654.29 114.18 1.4092 7.5055 0.8641 20.45 83.85 727.75 4747.549805 ... 0 0 0 0 0 0 0 0 0 82.86
4 2007-09-21 2671.22 115.60 1.4076 7.5000 0.8650 19.00 83.38 735.35 4837.549805 ... 0 0 0 0 0 0 0 0 0 81.64
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
3567 2022-11-23 11285.32 139.76 1.0364 7.1580 0.6707 20.35 77.93 1735.75 18267.250000 ... 1 0 0 0 0 0 0 0 0 NaN
3568 2022-11-25 11226.36 139.21 1.0402 7.1642 0.6750 20.50 76.45 1753.55 18512.750000 ... 0 0 1 0 0 0 0 0 0 NaN
3569 2022-11-28 11049.50 138.67 1.0386 7.2074 0.6694 22.21 77.10 1762.90 18562.750000 ... 0 0 0 0 0 1 0 0 0 NaN
3570 2022-11-29 10983.78 138.28 1.0356 7.1568 0.6691 21.89 77.96 1755.35 18618.050781 ... 0 0 0 0 0 0 1 0 0 NaN
3571 2022-11-30 11468.00 139.31 1.0323 7.0879 0.6717 20.58 80.48 1759.65 18758.349609 ... 0 0 0 0 0 0 0 1 0 NaN

3519 rows × 85 columns

In [22]:
del fred_df['DAY']
del fred_df['MONTH']
In [23]:
cols = ['NASDAQCOM', 'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL',
        'VIXCLS', 'DCOILWTICO', 'GOLD', 'NIFTY50', 'SNP500',
        'NASDAQCOM_rolling_5', 'NASDAQCOM_rolling_10', 'NASDAQCOM_rolling_15',
        'DEXJPUS_rolling_5', 'DEXJPUS_rolling_10', 'DEXJPUS_rolling_15',
        'DEXUSEU_rolling_5', 'DEXUSEU_rolling_10', 'DEXUSEU_rolling_15',
        'DEXCHUS_rolling_5', 'DEXCHUS_rolling_10', 'DEXCHUS_rolling_15',
        'DEXUSAL_rolling_5', 'DEXUSAL_rolling_10', 'DEXUSAL_rolling_15',
        'VIXCLS_rolling_5', 'VIXCLS_rolling_10', 'VIXCLS_rolling_15',
        'GOLD_rolling_5', 'GOLD_rolling_10', 'GOLD_rolling_15',
        'NIFTY50_rolling_5', 'NIFTY50_rolling_10', 'NIFTY50_rolling_15',
        'SNP500_rolling_5', 'SNP500_rolling_10', 'SNP500_rolling_15',
        'DCOILWTICO_rolling_5', 'DCOILWTICO_rolling_10', 'DCOILWTICO_rolling_15']
In [24]:
minmaxscaler = MinMaxScaler()

# Select by the list itself so the column order matches the assignment on the left
fred_df[cols] = minmaxscaler.fit_transform(fred_df[cols])
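One caveat worth flagging: fit_transform over the full frame lets test-period minima and maxima influence the scaling. A leakage-free sketch, fitting on the earliest 90% only (an alternative, not what this notebook runs):

n_train = int(0.90 * len(fred_df))
train_idx, test_idx = fred_df.index[:n_train], fred_df.index[n_train:]

scaler = MinMaxScaler()
fred_df.loc[train_idx, cols] = scaler.fit_transform(fred_df.loc[train_idx, cols])
fred_df.loc[test_idx, cols] = scaler.transform(fred_df.loc[test_idx, cols])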
In [25]:
fred_df.dropna(inplace = True)
In [26]:
fred_df.drop(columns='DATE', inplace = True)
In [ ]:
 
In [27]:
fred_df = fred_df.reset_index(drop=True)
In [28]:
fred_df
Out[28]:
NASDAQCOM DEXJPUS DEXUSEU DEXCHUS DEXUSAL VIXCLS DCOILWTICO GOLD NIFTY50 SNP500 ... 23 24 25 26 27 28 29 30 31 OIL_PRICE_FUTURE
0 0.103139 0.554791 0.709728 0.991767 0.615253 0.113120 0.636074 0.032140 0.157748 0.212788 ... 0 0 0 0 0 0 0 0 0 86.19
1 0.104262 0.556815 0.700031 0.991564 0.610890 0.094901 0.642986 0.026516 0.172664 0.215841 ... 0 0 0 0 0 0 0 0 0 87.58
2 0.104785 0.559379 0.710197 0.990215 0.614115 0.102379 0.648856 0.035062 0.179698 0.215190 ... 0 0 0 0 0 0 0 0 0 87.19
3 0.102109 0.566667 0.721301 0.990552 0.623791 0.132427 0.658456 0.040175 0.184836 0.213232 ... 0 0 0 0 0 0 0 0 0 89.48
4 0.104383 0.563833 0.711917 0.991227 0.623601 0.116791 0.662187 0.040906 0.178885 0.215027 ... 0 0 0 0 0 0 0 0 0 88.58
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
3495 0.673343 0.861943 0.121833 0.712782 0.185544 0.203535 0.672555 0.796713 0.978521 0.797244 ... 0 0 0 0 0 0 0 0 0 77.93
3496 0.670715 0.873549 0.113388 0.752396 0.172832 0.201088 0.650996 0.783090 0.974471 0.794273 ... 0 0 0 0 0 0 0 0 0 76.45
3497 0.670789 0.867881 0.114639 0.728168 0.175678 0.190075 0.642109 0.783236 0.972238 0.798835 ... 0 0 0 0 0 0 0 0 0 77.10
3498 0.662535 0.893792 0.097279 0.758402 0.158414 0.179742 0.640298 0.764901 0.963139 0.795094 ... 0 0 0 0 0 0 0 0 0 77.96
3499 0.672715 0.884885 0.103222 0.741396 0.166951 0.165194 0.646278 0.770270 0.968329 0.808123 ... 0 0 0 0 0 0 0 0 0 80.48

3500 rows × 82 columns

In [29]:
# Chronological split: hold out the last 10% of rows as the test set
threshold = int(-0.10 * fred_df.shape[0])

training_dataset = fred_df.iloc[:threshold, :]
test_dataset = fred_df.iloc[threshold:, :]
In [30]:
y_train = training_dataset['OIL_PRICE_FUTURE'].to_numpy()
y_test = test_dataset['OIL_PRICE_FUTURE'].to_numpy()

X_train = training_dataset.loc[:, training_dataset.columns != 'OIL_PRICE_FUTURE']
X_test = test_dataset.loc[:, test_dataset.columns != 'OIL_PRICE_FUTURE']

X_train_arr = np.array(X_train)
X_test_arr = np.array(X_test)
In [60]:
plt.plot(y_test)
Out[60]:
[<matplotlib.lines.Line2D at 0x2e2201760>]
In [62]:
model_nn = Sequential()
model_nn.add(Dense(128, activation='sigmoid', input_shape=(X_train.shape[1],)))
model_nn.add(Dropout(0.2))
model_nn.add(BatchNormalization())
model_nn.add(Dense(128, activation='relu'))
model_nn.add(Dense(128, activation='relu'))
model_nn.add(Dense(1, activation='relu'))  # ReLU output clamps predictions at zero; the oil price targets here are positive

opt = Adam()

model_nn.compile(optimizer=opt, loss='mean_squared_error', metrics=['mean_squared_error'])
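EarlyStopping is imported at the top but never wired in; given the fluctuating validation loss below, it would be a natural addition. A minimal sketch with assumed settings (the monitor and patience values are illustrative, not tuned):

early_stop = EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)
# model_nn.fit(X_train_arr, y_train, epochs=50,
#              validation_data=(X_test_arr, y_test),
#              callbacks=[early_stop])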
In [64]:
history_nn = model_nn.fit(X_train_arr, y_train, epochs=50,
                          validation_data=(X_test_arr, y_test))
Epoch 1/50
99/99 [==============================] - 1s 9ms/step - loss: 1593.0588 - mean_squared_error: 1593.0588 - val_loss: 506.7443 - val_mean_squared_error: 506.7443
Epoch 2/50
99/99 [==============================] - 1s 6ms/step - loss: 310.4996 - mean_squared_error: 310.4996 - val_loss: 1560.5497 - val_mean_squared_error: 1560.5497
Epoch 3/50
99/99 [==============================] - 1s 6ms/step - loss: 181.0571 - mean_squared_error: 181.0571 - val_loss: 2365.4963 - val_mean_squared_error: 2365.4963
Epoch 4/50
99/99 [==============================] - 1s 6ms/step - loss: 143.3135 - mean_squared_error: 143.3135 - val_loss: 2467.3264 - val_mean_squared_error: 2467.3264
Epoch 5/50
99/99 [==============================] - 1s 6ms/step - loss: 123.0720 - mean_squared_error: 123.0720 - val_loss: 2425.4399 - val_mean_squared_error: 2425.4399
Epoch 6/50
99/99 [==============================] - 1s 6ms/step - loss: 107.2480 - mean_squared_error: 107.2480 - val_loss: 2120.1809 - val_mean_squared_error: 2120.1807
Epoch 7/50
99/99 [==============================] - 1s 6ms/step - loss: 93.3354 - mean_squared_error: 93.3354 - val_loss: 1849.6232 - val_mean_squared_error: 1849.6232
Epoch 8/50
99/99 [==============================] - 1s 6ms/step - loss: 80.4635 - mean_squared_error: 80.4635 - val_loss: 1924.1687 - val_mean_squared_error: 1924.1687
Epoch 9/50
99/99 [==============================] - 1s 6ms/step - loss: 69.0091 - mean_squared_error: 69.0091 - val_loss: 1583.9023 - val_mean_squared_error: 1583.9023
Epoch 10/50
99/99 [==============================] - 1s 6ms/step - loss: 58.8279 - mean_squared_error: 58.8279 - val_loss: 1449.1304 - val_mean_squared_error: 1449.1304
Epoch 11/50
99/99 [==============================] - 1s 6ms/step - loss: 50.6160 - mean_squared_error: 50.6160 - val_loss: 1275.7494 - val_mean_squared_error: 1275.7494
Epoch 12/50
99/99 [==============================] - 1s 6ms/step - loss: 45.7804 - mean_squared_error: 45.7804 - val_loss: 1181.6930 - val_mean_squared_error: 1181.6930
Epoch 13/50
99/99 [==============================] - 1s 6ms/step - loss: 39.8369 - mean_squared_error: 39.8369 - val_loss: 1094.9023 - val_mean_squared_error: 1094.9023
Epoch 14/50
99/99 [==============================] - 1s 6ms/step - loss: 35.5055 - mean_squared_error: 35.5055 - val_loss: 994.6874 - val_mean_squared_error: 994.6874
Epoch 15/50
99/99 [==============================] - 1s 6ms/step - loss: 32.1742 - mean_squared_error: 32.1742 - val_loss: 842.0473 - val_mean_squared_error: 842.0473
Epoch 16/50
99/99 [==============================] - 1s 6ms/step - loss: 30.7230 - mean_squared_error: 30.7230 - val_loss: 755.7537 - val_mean_squared_error: 755.7537
Epoch 17/50
99/99 [==============================] - 1s 6ms/step - loss: 26.5894 - mean_squared_error: 26.5894 - val_loss: 781.2211 - val_mean_squared_error: 781.2211
Epoch 18/50
99/99 [==============================] - 1s 6ms/step - loss: 25.8950 - mean_squared_error: 25.8950 - val_loss: 653.6312 - val_mean_squared_error: 653.6312
Epoch 19/50
99/99 [==============================] - 1s 6ms/step - loss: 23.9193 - mean_squared_error: 23.9193 - val_loss: 546.8887 - val_mean_squared_error: 546.8887
Epoch 20/50
99/99 [==============================] - 1s 6ms/step - loss: 23.0903 - mean_squared_error: 23.0903 - val_loss: 589.3706 - val_mean_squared_error: 589.3706
Epoch 21/50
99/99 [==============================] - 1s 6ms/step - loss: 23.1466 - mean_squared_error: 23.1466 - val_loss: 548.8435 - val_mean_squared_error: 548.8435
Epoch 22/50
99/99 [==============================] - 1s 6ms/step - loss: 22.2571 - mean_squared_error: 22.2571 - val_loss: 598.1765 - val_mean_squared_error: 598.1765
Epoch 23/50
99/99 [==============================] - 1s 6ms/step - loss: 21.2476 - mean_squared_error: 21.2476 - val_loss: 506.6896 - val_mean_squared_error: 506.6896
Epoch 24/50
99/99 [==============================] - 1s 6ms/step - loss: 20.6850 - mean_squared_error: 20.6850 - val_loss: 501.8245 - val_mean_squared_error: 501.8245
Epoch 25/50
99/99 [==============================] - 1s 6ms/step - loss: 20.3076 - mean_squared_error: 20.3076 - val_loss: 484.3413 - val_mean_squared_error: 484.3413
Epoch 26/50
99/99 [==============================] - 1s 6ms/step - loss: 20.6663 - mean_squared_error: 20.6663 - val_loss: 475.2571 - val_mean_squared_error: 475.2571
Epoch 27/50
99/99 [==============================] - 1s 6ms/step - loss: 19.7844 - mean_squared_error: 19.7844 - val_loss: 509.0642 - val_mean_squared_error: 509.0642
Epoch 28/50
99/99 [==============================] - 1s 6ms/step - loss: 19.5921 - mean_squared_error: 19.5921 - val_loss: 477.2298 - val_mean_squared_error: 477.2298
Epoch 29/50
99/99 [==============================] - 1s 6ms/step - loss: 19.3653 - mean_squared_error: 19.3653 - val_loss: 454.3841 - val_mean_squared_error: 454.3841
Epoch 30/50
99/99 [==============================] - 1s 6ms/step - loss: 19.1276 - mean_squared_error: 19.1276 - val_loss: 511.6975 - val_mean_squared_error: 511.6975
Epoch 31/50
99/99 [==============================] - 1s 6ms/step - loss: 19.0089 - mean_squared_error: 19.0089 - val_loss: 476.7577 - val_mean_squared_error: 476.7577
Epoch 32/50
99/99 [==============================] - 1s 6ms/step - loss: 18.8278 - mean_squared_error: 18.8278 - val_loss: 446.1084 - val_mean_squared_error: 446.1084
Epoch 33/50
99/99 [==============================] - 1s 6ms/step - loss: 18.8142 - mean_squared_error: 18.8142 - val_loss: 473.3198 - val_mean_squared_error: 473.3198
Epoch 34/50
99/99 [==============================] - 1s 6ms/step - loss: 18.5110 - mean_squared_error: 18.5110 - val_loss: 469.9340 - val_mean_squared_error: 469.9339
Epoch 35/50
99/99 [==============================] - 1s 6ms/step - loss: 19.8652 - mean_squared_error: 19.8652 - val_loss: 475.3361 - val_mean_squared_error: 475.3361
Epoch 36/50
99/99 [==============================] - 1s 6ms/step - loss: 18.4262 - mean_squared_error: 18.4262 - val_loss: 493.7228 - val_mean_squared_error: 493.7228
Epoch 37/50
99/99 [==============================] - 1s 6ms/step - loss: 18.3382 - mean_squared_error: 18.3382 - val_loss: 463.2496 - val_mean_squared_error: 463.2496
Epoch 38/50
99/99 [==============================] - 1s 6ms/step - loss: 18.9124 - mean_squared_error: 18.9124 - val_loss: 497.4500 - val_mean_squared_error: 497.4500
Epoch 39/50
99/99 [==============================] - 1s 6ms/step - loss: 17.7028 - mean_squared_error: 17.7028 - val_loss: 474.4933 - val_mean_squared_error: 474.4933
Epoch 40/50
99/99 [==============================] - 1s 6ms/step - loss: 17.6716 - mean_squared_error: 17.6716 - val_loss: 447.0229 - val_mean_squared_error: 447.0229
Epoch 41/50
99/99 [==============================] - 1s 6ms/step - loss: 17.4227 - mean_squared_error: 17.4227 - val_loss: 547.8528 - val_mean_squared_error: 547.8528
Epoch 42/50
99/99 [==============================] - 1s 6ms/step - loss: 18.6590 - mean_squared_error: 18.6590 - val_loss: 478.5550 - val_mean_squared_error: 478.5550
Epoch 43/50
99/99 [==============================] - 1s 6ms/step - loss: 17.1359 - mean_squared_error: 17.1359 - val_loss: 448.7675 - val_mean_squared_error: 448.7675
Epoch 44/50
99/99 [==============================] - 1s 6ms/step - loss: 17.3916 - mean_squared_error: 17.3916 - val_loss: 512.2160 - val_mean_squared_error: 512.2160
Epoch 45/50
99/99 [==============================] - 1s 6ms/step - loss: 17.4679 - mean_squared_error: 17.4679 - val_loss: 502.5040 - val_mean_squared_error: 502.5040
Epoch 46/50
99/99 [==============================] - 1s 6ms/step - loss: 18.2532 - mean_squared_error: 18.2532 - val_loss: 554.9775 - val_mean_squared_error: 554.9775
Epoch 47/50
99/99 [==============================] - 1s 6ms/step - loss: 17.0852 - mean_squared_error: 17.0852 - val_loss: 512.9082 - val_mean_squared_error: 512.9082
Epoch 48/50
99/99 [==============================] - 1s 6ms/step - loss: 17.8794 - mean_squared_error: 17.8794 - val_loss: 519.7421 - val_mean_squared_error: 519.7421
Epoch 49/50
99/99 [==============================] - 1s 6ms/step - loss: 17.0757 - mean_squared_error: 17.0757 - val_loss: 489.1199 - val_mean_squared_error: 489.1199
Epoch 50/50
99/99 [==============================] - 1s 6ms/step - loss: 16.7911 - mean_squared_error: 16.7911 - val_loss: 571.3528 - val_mean_squared_error: 571.3528
In [65]:
plt.plot(history_nn.history['loss'], label='Training Loss')
plt.plot(history_nn.history['val_loss'], label='Validation Loss')
plt.legend()
Out[65]:
<matplotlib.legend.Legend at 0x15c11f790>
In [75]:
ynew_nn_train = model_nn.predict(X_train_arr)
ynew_nn_test = model_nn.predict(X_test_arr)
In [67]:
# LSTM model
In [83]:
model_lstm = Sequential()

model_lstm.add(LSTM(128, input_shape=(X_train.shape[1], 1), return_sequences=True))
model_lstm.add(Dropout(0.2))
model_lstm.add(BatchNormalization())  # normalizes layer activations, for the same reason the input data is normalized

model_lstm.add(LSTM(128, return_sequences=True))
model_lstm.add(Dropout(0.1))
model_lstm.add(BatchNormalization())

model_lstm.add(LSTM(128))
model_lstm.add(Dropout(0.2))
model_lstm.add(BatchNormalization())

model_lstm.add(Dense(32, activation='relu'))
model_lstm.add(Dropout(0.2))

model_lstm.add(Dense(1, activation='relu'))

opt = tf.keras.optimizers.Adam(learning_rate=0.001, decay=1e-6)  # 'lr' is deprecated in favor of 'learning_rate'

# Compile model
model_lstm.compile(
    loss='mean_squared_error',
    optimizer=opt,
    metrics=['mean_squared_error']
)
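Keras LSTM layers expect 3-D input of shape (samples, timesteps, features). With input_shape=(X_train.shape[1], 1), each feature column is treated as one timestep of a univariate pseudo-sequence rather than a true time window. A hedged sketch of the explicit reshape that matches this declaration:

X_train_lstm = np.expand_dims(X_train_arr, axis=-1)  # (n, n_features) -> (n, n_features, 1)
X_test_lstm = np.expand_dims(X_test_arr, axis=-1)
print(X_train_lstm.shape)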
In [84]:
history1 = model_lstm.fit(
    X_train, y_train,
    batch_size=32,
    epochs=50,
    validation_data=(X_test, y_test),
)
Epoch 1/50
99/99 [==============================] - 11s 88ms/step - loss: 4230.4355 - mean_squared_error: 4230.4355 - val_loss: 4205.9863 - val_mean_squared_error: 4205.9863
Epoch 2/50
99/99 [==============================] - 6s 63ms/step - loss: 2726.0366 - mean_squared_error: 2726.0366 - val_loss: 976.9995 - val_mean_squared_error: 976.9995
Epoch 3/50
99/99 [==============================] - 6s 63ms/step - loss: 1749.7466 - mean_squared_error: 1749.7466 - val_loss: 494.2462 - val_mean_squared_error: 494.2462
Epoch 4/50
99/99 [==============================] - 6s 63ms/step - loss: 1704.6195 - mean_squared_error: 1704.6195 - val_loss: 584.8573 - val_mean_squared_error: 584.8573
Epoch 5/50
99/99 [==============================] - 6s 63ms/step - loss: 1666.7882 - mean_squared_error: 1666.7882 - val_loss: 360.2236 - val_mean_squared_error: 360.2236
Epoch 6/50
99/99 [==============================] - 6s 63ms/step - loss: 1684.0891 - mean_squared_error: 1684.0891 - val_loss: 874.7658 - val_mean_squared_error: 874.7658
Epoch 7/50
99/99 [==============================] - 6s 63ms/step - loss: 1582.9504 - mean_squared_error: 1582.9504 - val_loss: 693.6538 - val_mean_squared_error: 693.6538
Epoch 8/50
99/99 [==============================] - 6s 63ms/step - loss: 1325.5155 - mean_squared_error: 1325.5155 - val_loss: 360.6150 - val_mean_squared_error: 360.6150
Epoch 9/50
99/99 [==============================] - 6s 63ms/step - loss: 1179.2050 - mean_squared_error: 1179.2050 - val_loss: 7834.3628 - val_mean_squared_error: 7834.3628
Epoch 10/50
99/99 [==============================] - 6s 63ms/step - loss: 1143.6530 - mean_squared_error: 1143.6530 - val_loss: 7815.6265 - val_mean_squared_error: 7815.6265
Epoch 11/50
99/99 [==============================] - 6s 63ms/step - loss: 1102.0300 - mean_squared_error: 1102.0300 - val_loss: 967.0382 - val_mean_squared_error: 967.0382
Epoch 12/50
99/99 [==============================] - 6s 64ms/step - loss: 1156.9025 - mean_squared_error: 1156.9025 - val_loss: 833.3932 - val_mean_squared_error: 833.3932
Epoch 13/50
99/99 [==============================] - 6s 63ms/step - loss: 1125.1029 - mean_squared_error: 1125.1029 - val_loss: 1074.2975 - val_mean_squared_error: 1074.2975
Epoch 14/50
99/99 [==============================] - 6s 64ms/step - loss: 1108.7242 - mean_squared_error: 1108.7242 - val_loss: 1571.6157 - val_mean_squared_error: 1571.6157
Epoch 15/50
99/99 [==============================] - 6s 63ms/step - loss: 1110.6202 - mean_squared_error: 1110.6202 - val_loss: 722.4402 - val_mean_squared_error: 722.4402
Epoch 16/50
99/99 [==============================] - 6s 64ms/step - loss: 1084.3549 - mean_squared_error: 1084.3549 - val_loss: 1108.8022 - val_mean_squared_error: 1108.8021
Epoch 17/50
99/99 [==============================] - 6s 64ms/step - loss: 1110.3868 - mean_squared_error: 1110.3868 - val_loss: 907.7257 - val_mean_squared_error: 907.7257
Epoch 18/50
99/99 [==============================] - 6s 63ms/step - loss: 1140.1252 - mean_squared_error: 1140.1252 - val_loss: 1231.2783 - val_mean_squared_error: 1231.2783
Epoch 19/50
99/99 [==============================] - 6s 64ms/step - loss: 1091.4835 - mean_squared_error: 1091.4835 - val_loss: 1079.5897 - val_mean_squared_error: 1079.5897
Epoch 20/50
99/99 [==============================] - 6s 64ms/step - loss: 1112.8717 - mean_squared_error: 1112.8717 - val_loss: 1197.7202 - val_mean_squared_error: 1197.7202
Epoch 21/50
99/99 [==============================] - 6s 64ms/step - loss: 1088.7268 - mean_squared_error: 1088.7268 - val_loss: 744.6910 - val_mean_squared_error: 744.6910
Epoch 22/50
99/99 [==============================] - 6s 64ms/step - loss: 1096.2015 - mean_squared_error: 1096.2015 - val_loss: 1038.1469 - val_mean_squared_error: 1038.1469
Epoch 23/50
99/99 [==============================] - 6s 64ms/step - loss: 1102.7847 - mean_squared_error: 1102.7847 - val_loss: 951.3005 - val_mean_squared_error: 951.3004
Epoch 24/50
99/99 [==============================] - 6s 64ms/step - loss: 1085.6829 - mean_squared_error: 1085.6829 - val_loss: 1253.6191 - val_mean_squared_error: 1253.6191
Epoch 25/50
99/99 [==============================] - 6s 64ms/step - loss: 1057.9429 - mean_squared_error: 1057.9429 - val_loss: 1318.0912 - val_mean_squared_error: 1318.0912
Epoch 26/50
99/99 [==============================] - 6s 64ms/step - loss: 1094.1671 - mean_squared_error: 1094.1671 - val_loss: 1056.1107 - val_mean_squared_error: 1056.1107
Epoch 27/50
99/99 [==============================] - 6s 64ms/step - loss: 1073.0592 - mean_squared_error: 1073.0592 - val_loss: 567.1240 - val_mean_squared_error: 567.1240
Epoch 28/50
99/99 [==============================] - 6s 65ms/step - loss: 1105.6030 - mean_squared_error: 1105.6030 - val_loss: 884.9732 - val_mean_squared_error: 884.9732
Epoch 29/50
99/99 [==============================] - 6s 64ms/step - loss: 1128.2102 - mean_squared_error: 1128.2102 - val_loss: 1164.2980 - val_mean_squared_error: 1164.2980
Epoch 30/50
99/99 [==============================] - 7s 66ms/step - loss: 1152.4695 - mean_squared_error: 1152.4695 - val_loss: 1323.8398 - val_mean_squared_error: 1323.8397
Epoch 31/50
99/99 [==============================] - 6s 64ms/step - loss: 659.5083 - mean_squared_error: 659.5083 - val_loss: 13852.5322 - val_mean_squared_error: 13852.5322
Epoch 32/50
99/99 [==============================] - 6s 65ms/step - loss: 411.9696 - mean_squared_error: 411.9696 - val_loss: 538.8729 - val_mean_squared_error: 538.8729
Epoch 33/50
99/99 [==============================] - 6s 65ms/step - loss: 439.7666 - mean_squared_error: 439.7666 - val_loss: 661.7154 - val_mean_squared_error: 661.7154
Epoch 34/50
99/99 [==============================] - 6s 65ms/step - loss: 413.7050 - mean_squared_error: 413.7050 - val_loss: 2247.0698 - val_mean_squared_error: 2247.0698
Epoch 35/50
99/99 [==============================] - 7s 66ms/step - loss: 404.4014 - mean_squared_error: 404.4014 - val_loss: 2005.0566 - val_mean_squared_error: 2005.0566
Epoch 36/50
99/99 [==============================] - 6s 65ms/step - loss: 422.0903 - mean_squared_error: 422.0903 - val_loss: 1692.3689 - val_mean_squared_error: 1692.3689
Epoch 37/50
99/99 [==============================] - 6s 65ms/step - loss: 391.6375 - mean_squared_error: 391.6375 - val_loss: 1269.7179 - val_mean_squared_error: 1269.7179
Epoch 38/50
99/99 [==============================] - 6s 65ms/step - loss: 409.5898 - mean_squared_error: 409.5898 - val_loss: 1343.4760 - val_mean_squared_error: 1343.4760
Epoch 39/50
99/99 [==============================] - 6s 66ms/step - loss: 407.5581 - mean_squared_error: 407.5581 - val_loss: 995.4271 - val_mean_squared_error: 995.4269
Epoch 40/50
99/99 [==============================] - 6s 65ms/step - loss: 376.8974 - mean_squared_error: 376.8974 - val_loss: 1039.2869 - val_mean_squared_error: 1039.2869
Epoch 41/50
99/99 [==============================] - 6s 66ms/step - loss: 389.4229 - mean_squared_error: 389.4229 - val_loss: 1165.2218 - val_mean_squared_error: 1165.2218
Epoch 42/50
99/99 [==============================] - 7s 66ms/step - loss: 426.8871 - mean_squared_error: 426.8871 - val_loss: 867.5423 - val_mean_squared_error: 867.5423
Epoch 43/50
99/99 [==============================] - 6s 66ms/step - loss: 366.3143 - mean_squared_error: 366.3143 - val_loss: 1499.9716 - val_mean_squared_error: 1499.9716
Epoch 44/50
99/99 [==============================] - 6s 65ms/step - loss: 376.6009 - mean_squared_error: 376.6009 - val_loss: 1096.4568 - val_mean_squared_error: 1096.4568
Epoch 45/50
99/99 [==============================] - 7s 66ms/step - loss: 383.0638 - mean_squared_error: 383.0638 - val_loss: 1133.1332 - val_mean_squared_error: 1133.1332
Epoch 46/50
99/99 [==============================] - 7s 66ms/step - loss: 250.0545 - mean_squared_error: 250.0545 - val_loss: 317.8355 - val_mean_squared_error: 317.8355
Epoch 47/50
99/99 [==============================] - 7s 66ms/step - loss: 54.7269 - mean_squared_error: 54.7269 - val_loss: 1027.7544 - val_mean_squared_error: 1027.7544
Epoch 48/50
99/99 [==============================] - 7s 66ms/step - loss: 46.2900 - mean_squared_error: 46.2900 - val_loss: 636.3821 - val_mean_squared_error: 636.3821
Epoch 49/50
99/99 [==============================] - 6s 66ms/step - loss: 48.6717 - mean_squared_error: 48.6717 - val_loss: 906.2958 - val_mean_squared_error: 906.2958
Epoch 50/50
99/99 [==============================] - 7s 66ms/step - loss: 52.5867 - mean_squared_error: 52.5867 - val_loss: 710.5558 - val_mean_squared_error: 710.5558
In [85]:
plt.plot(history1.history['loss'], label='Training Loss')
plt.plot(history1.history['val_loss'], label='Validation Loss')
plt.legend()
Out[85]:
<matplotlib.legend.Legend at 0x15d440af0>
In [86]:
ynew1_train = model_lstm.predict(X_train_arr)
ynew1_test = model_lstm.predict(X_test_arr)
In [87]:
plt.figure(figsize=(12, 6), dpi=400)
plt.plot(y_train, label='Actual')
plt.plot(ynew_nn_train, label='Dense NN')
plt.plot(ynew1_train, label='LSTM')
plt.legend()
Out[87]:
<matplotlib.legend.Legend at 0x334c228b0>
In [89]:
plt.figure(figsize=(12, 6), dpi=600)

plt.plot(y_test, label='Actual')
plt.plot(ynew_nn_test, label='Dense NN')
plt.plot(ynew1_test, label='LSTM')
plt.legend()
plt.ylim(20, 150)
Out[89]:
(20.0, 150.0)
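The comparison above is purely visual; a test-set RMSE per model would quantify the same comparison. A minimal evaluation sketch:

from sklearn.metrics import mean_squared_error

rmse_nn = np.sqrt(mean_squared_error(y_test, ynew_nn_test.ravel()))
rmse_lstm = np.sqrt(mean_squared_error(y_test, ynew1_test.ravel()))
print(f'Dense NN RMSE: {rmse_nn:.2f}, LSTM RMSE: {rmse_lstm:.2f}')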
In [90]:
# Without COVID: rebuild the pipeline on data up to 2020-03-01
In [484]:
#fred_df= pd.read_csv('fred_df.csv', index_col= 'Unnamed: 0')
In [509]:
# Rename columns ('USD (AM)' becomes 'GOLD')
fred_df.columns = ['DATE', 'NASDAQCOM', 'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL',
       'VIXCLS', 'DCOILWTICO', 'GOLD', 'NIFTY50', 'SNP500', 'MONTH',
       'DAY']

cols = ['NASDAQCOM', 'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL',
       'VIXCLS', 'GOLD', 'NIFTY50', 'SNP500', 'DCOILWTICO']

fred_df.dropna(inplace=True)

# Rolling-mean features; derive each column suffix from the window constants
for i in cols:
    fred_df[f'{i}_rolling_{ROLLING}'] = fred_df[i].rolling(window=ROLLING).mean()
    fred_df[f'{i}_rolling_{ROLLING1}'] = fred_df[i].rolling(window=ROLLING1).mean()
    fred_df[f'{i}_rolling_{ROLLING2}'] = fred_df[i].rolling(window=ROLLING2).mean()

months = pd.get_dummies(fred_df['MONTH'], drop_first=True)  # January is the dropped baseline
months.columns = ['FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']

day = pd.get_dummies(fred_df['DAY'], drop_first=True)  # day-of-month dummies; day 1 dropped

fred_df = pd.concat([fred_df, months, day], axis=1)
In [510]:
fred_df = fred_df[fred_df['DATE'] <= datetime(2020, 3, 1)]  # keep only pre-COVID observations
In [511]:
fred_df['OIL_PRICE_FUTURE'] = fred_df['DCOILWTICO'].shift(-FUTURE)  # target: WTI price FUTURE trading days ahead
In [512]:
del fred_df['DAY']
del fred_df['MONTH']
In [513]:
cols = ['NASDAQCOM', 'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL',
        'VIXCLS', 'DCOILWTICO', 'GOLD', 'NIFTY50', 'SNP500',
        'NASDAQCOM_rolling_5', 'NASDAQCOM_rolling_10', 'NASDAQCOM_rolling_15',
        'DEXJPUS_rolling_5', 'DEXJPUS_rolling_10', 'DEXJPUS_rolling_15',
        'DEXUSEU_rolling_5', 'DEXUSEU_rolling_10', 'DEXUSEU_rolling_15',
        'DEXCHUS_rolling_5', 'DEXCHUS_rolling_10', 'DEXCHUS_rolling_15',
        'DEXUSAL_rolling_5', 'DEXUSAL_rolling_10', 'DEXUSAL_rolling_15',
        'VIXCLS_rolling_5', 'VIXCLS_rolling_10', 'VIXCLS_rolling_15',
        'GOLD_rolling_5', 'GOLD_rolling_10', 'GOLD_rolling_15',
        'NIFTY50_rolling_5', 'NIFTY50_rolling_10', 'NIFTY50_rolling_15',
        'SNP500_rolling_5', 'SNP500_rolling_10', 'SNP500_rolling_15',
        'DCOILWTICO_rolling_5', 'DCOILWTICO_rolling_10', 'DCOILWTICO_rolling_15']
In [514]:
minmaxscaler = MinMaxScaler()

# Select by the list itself so the column order matches the assignment on the left
fred_df[cols] = minmaxscaler.fit_transform(fred_df[cols])

fred_df.dropna(inplace=True)
In [516]:
date_arr = fred_df['DATE']
In [517]:
fred_df.drop(columns='DATE', inplace=True)
fred_df = fred_df.reset_index(drop=True)
In [518]:
# Chronological split: hold out the last 10% of rows as the test set
threshold = int(-0.10 * fred_df.shape[0])

training_dataset = fred_df.iloc[:threshold, :]
test_dataset = fred_df.iloc[threshold:, :]
In [519]:
threshold
Out[519]:
-286
In [520]:
2863 - 286  # sanity check: training rows = total rows - test rows
Out[520]:
2577
In [521]:
y_train = training_dataset['OIL_PRICE_FUTURE'].to_numpy()
y_test = test_dataset['OIL_PRICE_FUTURE'].to_numpy()

X_train = training_dataset.loc[:, training_dataset.columns != 'OIL_PRICE_FUTURE']
X_test = test_dataset.loc[:, test_dataset.columns != 'OIL_PRICE_FUTURE']

X_train_arr = np.array(X_train)
X_test_arr = np.array(X_test)
In [421]:
model_nn = Sequential()
model_nn.add(Dense(128, activation='sigmoid', input_shape=(X_train.shape[1],)))
model_nn.add(Dropout(0.2))
model_nn.add(BatchNormalization())
model_nn.add(Dense(128, activation='relu'))
model_nn.add(Dense(128, activation='relu'))
model_nn.add(Dense(1, activation='relu'))  # ReLU output clamps predictions at zero; the oil price targets here are positive

opt = Adam()

model_nn.compile(optimizer=opt, loss='mean_squared_error', metrics=['mean_squared_error'])
In [422]:
history_nn = model_nn.fit(X_train_arr, y_train, epochs=100,
                          validation_data=(X_test_arr, y_test))
Epoch 1/100
81/81 [==============================] - 2s 16ms/step - loss: 2141.9353 - mean_squared_error: 2141.9353 - val_loss: 789.9691 - val_mean_squared_error: 789.9691
Epoch 2/100
81/81 [==============================] - 1s 8ms/step - loss: 68.3641 - mean_squared_error: 68.3641 - val_loss: 1424.0282 - val_mean_squared_error: 1424.0282
Epoch 3/100
81/81 [==============================] - 1s 9ms/step - loss: 55.6322 - mean_squared_error: 55.6322 - val_loss: 2350.1443 - val_mean_squared_error: 2350.1443
Epoch 4/100
81/81 [==============================] - 1s 9ms/step - loss: 44.9031 - mean_squared_error: 44.9031 - val_loss: 2572.9102 - val_mean_squared_error: 2572.9102
Epoch 5/100
81/81 [==============================] - 1s 8ms/step - loss: 46.9331 - mean_squared_error: 46.9331 - val_loss: 2659.3704 - val_mean_squared_error: 2659.3704
Epoch 6/100
81/81 [==============================] - 1s 8ms/step - loss: 40.9736 - mean_squared_error: 40.9736 - val_loss: 2586.2861 - val_mean_squared_error: 2586.2861
Epoch 7/100
81/81 [==============================] - 1s 8ms/step - loss: 43.8070 - mean_squared_error: 43.8070 - val_loss: 2688.8582 - val_mean_squared_error: 2688.8582
Epoch 8/100
81/81 [==============================] - 1s 9ms/step - loss: 36.1050 - mean_squared_error: 36.1050 - val_loss: 2453.4165 - val_mean_squared_error: 2453.4165
Epoch 9/100
81/81 [==============================] - 1s 9ms/step - loss: 39.5306 - mean_squared_error: 39.5306 - val_loss: 2463.1479 - val_mean_squared_error: 2463.1479
Epoch 10/100
81/81 [==============================] - 1s 8ms/step - loss: 40.2231 - mean_squared_error: 40.2231 - val_loss: 2400.0713 - val_mean_squared_error: 2400.0713
Epoch 11/100
81/81 [==============================] - 1s 8ms/step - loss: 36.1465 - mean_squared_error: 36.1465 - val_loss: 2391.6816 - val_mean_squared_error: 2391.6816
Epoch 12/100
81/81 [==============================] - 1s 8ms/step - loss: 39.8086 - mean_squared_error: 39.8086 - val_loss: 2546.6045 - val_mean_squared_error: 2546.6045
Epoch 13/100
81/81 [==============================] - 1s 8ms/step - loss: 38.9847 - mean_squared_error: 38.9847 - val_loss: 2263.2573 - val_mean_squared_error: 2263.2573
Epoch 14/100
81/81 [==============================] - 1s 8ms/step - loss: 38.3939 - mean_squared_error: 38.3939 - val_loss: 2292.4773 - val_mean_squared_error: 2292.4773
Epoch 15/100
81/81 [==============================] - 1s 8ms/step - loss: 50.1689 - mean_squared_error: 50.1689 - val_loss: 2202.9028 - val_mean_squared_error: 2202.9028
Epoch 16/100
81/81 [==============================] - 1s 8ms/step - loss: 40.9990 - mean_squared_error: 40.9990 - val_loss: 2093.9304 - val_mean_squared_error: 2093.9304
Epoch 17/100
81/81 [==============================] - 1s 8ms/step - loss: 33.9500 - mean_squared_error: 33.9500 - val_loss: 2255.3176 - val_mean_squared_error: 2255.3176
Epoch 18/100
81/81 [==============================] - 1s 8ms/step - loss: 40.3106 - mean_squared_error: 40.3106 - val_loss: 2166.6238 - val_mean_squared_error: 2166.6238
Epoch 19/100
81/81 [==============================] - 1s 8ms/step - loss: 33.6089 - mean_squared_error: 33.6089 - val_loss: 2244.4077 - val_mean_squared_error: 2244.4077
Epoch 20/100
81/81 [==============================] - 1s 8ms/step - loss: 39.2530 - mean_squared_error: 39.2530 - val_loss: 1874.1536 - val_mean_squared_error: 1874.1536
Epoch 21/100
81/81 [==============================] - 1s 9ms/step - loss: 46.0657 - mean_squared_error: 46.0657 - val_loss: 1942.2130 - val_mean_squared_error: 1942.2130
Epoch 22/100
81/81 [==============================] - 1s 8ms/step - loss: 34.0823 - mean_squared_error: 34.0823 - val_loss: 1978.8463 - val_mean_squared_error: 1978.8463
Epoch 23/100
81/81 [==============================] - 1s 8ms/step - loss: 40.4616 - mean_squared_error: 40.4616 - val_loss: 2099.3237 - val_mean_squared_error: 2099.3237
Epoch 24/100
81/81 [==============================] - 1s 8ms/step - loss: 37.6497 - mean_squared_error: 37.6497 - val_loss: 1977.0371 - val_mean_squared_error: 1977.0371
Epoch 25/100
81/81 [==============================] - 1s 8ms/step - loss: 40.2686 - mean_squared_error: 40.2686 - val_loss: 1821.4744 - val_mean_squared_error: 1821.4744
Epoch 26/100
81/81 [==============================] - 1s 8ms/step - loss: 36.4280 - mean_squared_error: 36.4280 - val_loss: 1839.4359 - val_mean_squared_error: 1839.4359
Epoch 27/100
81/81 [==============================] - 1s 8ms/step - loss: 31.4147 - mean_squared_error: 31.4147 - val_loss: 1704.6852 - val_mean_squared_error: 1704.6852
Epoch 28/100
81/81 [==============================] - 1s 8ms/step - loss: 36.0987 - mean_squared_error: 36.0987 - val_loss: 1881.7751 - val_mean_squared_error: 1881.7751
Epoch 29/100
81/81 [==============================] - 1s 8ms/step - loss: 41.7324 - mean_squared_error: 41.7324 - val_loss: 1739.5616 - val_mean_squared_error: 1739.5616
Epoch 30/100
81/81 [==============================] - 1s 8ms/step - loss: 31.5844 - mean_squared_error: 31.5844 - val_loss: 1804.2080 - val_mean_squared_error: 1804.2080
Epoch 31/100
81/81 [==============================] - 1s 8ms/step - loss: 30.2529 - mean_squared_error: 30.2529 - val_loss: 1746.2307 - val_mean_squared_error: 1746.2307
Epoch 32/100
81/81 [==============================] - 1s 8ms/step - loss: 38.4858 - mean_squared_error: 38.4858 - val_loss: 1668.5758 - val_mean_squared_error: 1668.5758
Epoch 33/100
81/81 [==============================] - 1s 8ms/step - loss: 39.7019 - mean_squared_error: 39.7019 - val_loss: 1558.1949 - val_mean_squared_error: 1558.1949
Epoch 34/100
81/81 [==============================] - 1s 8ms/step - loss: 34.1647 - mean_squared_error: 34.1647 - val_loss: 1525.0774 - val_mean_squared_error: 1525.0774
Epoch 35/100
81/81 [==============================] - 1s 8ms/step - loss: 36.8754 - mean_squared_error: 36.8754 - val_loss: 1605.1414 - val_mean_squared_error: 1605.1414
Epoch 36/100
81/81 [==============================] - 1s 8ms/step - loss: 36.4343 - mean_squared_error: 36.4343 - val_loss: 1689.4928 - val_mean_squared_error: 1689.4928
Epoch 37/100
81/81 [==============================] - 1s 8ms/step - loss: 39.9945 - mean_squared_error: 39.9945 - val_loss: 1561.3973 - val_mean_squared_error: 1561.3973
Epoch 38/100
81/81 [==============================] - 1s 8ms/step - loss: 35.2830 - mean_squared_error: 35.2830 - val_loss: 1473.7559 - val_mean_squared_error: 1473.7559
Epoch 39/100
81/81 [==============================] - 1s 8ms/step - loss: 30.1815 - mean_squared_error: 30.1815 - val_loss: 1371.1498 - val_mean_squared_error: 1371.1498
Epoch 40/100
81/81 [==============================] - 1s 8ms/step - loss: 30.9531 - mean_squared_error: 30.9531 - val_loss: 1337.1477 - val_mean_squared_error: 1337.1477
Epoch 41/100
81/81 [==============================] - 1s 8ms/step - loss: 36.3538 - mean_squared_error: 36.3538 - val_loss: 1474.6890 - val_mean_squared_error: 1474.6890
Epoch 42/100
81/81 [==============================] - 1s 8ms/step - loss: 35.4032 - mean_squared_error: 35.4032 - val_loss: 1354.6710 - val_mean_squared_error: 1354.6710
Epoch 43/100
81/81 [==============================] - 1s 8ms/step - loss: 29.9832 - mean_squared_error: 29.9832 - val_loss: 1444.3213 - val_mean_squared_error: 1444.3213
Epoch 44/100
81/81 [==============================] - 1s 8ms/step - loss: 31.5350 - mean_squared_error: 31.5350 - val_loss: 1360.8358 - val_mean_squared_error: 1360.8358
Epoch 45/100
81/81 [==============================] - 1s 8ms/step - loss: 32.2007 - mean_squared_error: 32.2007 - val_loss: 1396.8754 - val_mean_squared_error: 1396.8754
Epoch 46/100
81/81 [==============================] - 1s 8ms/step - loss: 28.8859 - mean_squared_error: 28.8859 - val_loss: 1310.1875 - val_mean_squared_error: 1310.1875
Epoch 47/100
81/81 [==============================] - 1s 8ms/step - loss: 34.4895 - mean_squared_error: 34.4895 - val_loss: 1390.2069 - val_mean_squared_error: 1390.2069
Epoch 48/100
81/81 [==============================] - 1s 8ms/step - loss: 28.1233 - mean_squared_error: 28.1233 - val_loss: 1386.4011 - val_mean_squared_error: 1386.4011
Epoch 49/100
81/81 [==============================] - 1s 8ms/step - loss: 36.5047 - mean_squared_error: 36.5047 - val_loss: 1552.4806 - val_mean_squared_error: 1552.4806
Epoch 50/100
81/81 [==============================] - 1s 8ms/step - loss: 34.2777 - mean_squared_error: 34.2777 - val_loss: 1356.7319 - val_mean_squared_error: 1356.7319
Epoch 51/100
81/81 [==============================] - 1s 8ms/step - loss: 28.6670 - mean_squared_error: 28.6670 - val_loss: 1433.2153 - val_mean_squared_error: 1433.2153
Epoch 52/100
81/81 [==============================] - 1s 8ms/step - loss: 32.3528 - mean_squared_error: 32.3528 - val_loss: 1352.4567 - val_mean_squared_error: 1352.4567
Epoch 53/100
81/81 [==============================] - 1s 8ms/step - loss: 30.9044 - mean_squared_error: 30.9044 - val_loss: 1200.0422 - val_mean_squared_error: 1200.0422
Epoch 54/100
81/81 [==============================] - 1s 8ms/step - loss: 25.8220 - mean_squared_error: 25.8220 - val_loss: 1460.2491 - val_mean_squared_error: 1460.2491
Epoch 55/100
81/81 [==============================] - 1s 8ms/step - loss: 30.9864 - mean_squared_error: 30.9864 - val_loss: 1316.4396 - val_mean_squared_error: 1316.4396
Epoch 56/100
81/81 [==============================] - 1s 8ms/step - loss: 24.1909 - mean_squared_error: 24.1909 - val_loss: 1150.8031 - val_mean_squared_error: 1150.8031
Epoch 57/100
81/81 [==============================] - 1s 8ms/step - loss: 29.3100 - mean_squared_error: 29.3100 - val_loss: 1306.3197 - val_mean_squared_error: 1306.3197
Epoch 58/100
81/81 [==============================] - 1s 8ms/step - loss: 26.4210 - mean_squared_error: 26.4210 - val_loss: 1105.5020 - val_mean_squared_error: 1105.5020
Epoch 59/100
81/81 [==============================] - 1s 8ms/step - loss: 25.7086 - mean_squared_error: 25.7086 - val_loss: 1310.8149 - val_mean_squared_error: 1310.8149
Epoch 60/100
81/81 [==============================] - 1s 8ms/step - loss: 27.5144 - mean_squared_error: 27.5144 - val_loss: 1163.6737 - val_mean_squared_error: 1163.6737
Epoch 61/100
81/81 [==============================] - 1s 8ms/step - loss: 28.8534 - mean_squared_error: 28.8534 - val_loss: 1319.4238 - val_mean_squared_error: 1319.4238
Epoch 62/100
81/81 [==============================] - 1s 8ms/step - loss: 28.1363 - mean_squared_error: 28.1363 - val_loss: 1135.4771 - val_mean_squared_error: 1135.4771
Epoch 63/100
81/81 [==============================] - 1s 8ms/step - loss: 24.2461 - mean_squared_error: 24.2461 - val_loss: 1189.6490 - val_mean_squared_error: 1189.6490
Epoch 64/100
81/81 [==============================] - 1s 8ms/step - loss: 24.9741 - mean_squared_error: 24.9741 - val_loss: 1069.7906 - val_mean_squared_error: 1069.7906
Epoch 65/100
81/81 [==============================] - 1s 8ms/step - loss: 27.7958 - mean_squared_error: 27.7958 - val_loss: 1294.2358 - val_mean_squared_error: 1294.2358
Epoch 66/100
81/81 [==============================] - 1s 8ms/step - loss: 29.2834 - mean_squared_error: 29.2834 - val_loss: 1398.5321 - val_mean_squared_error: 1398.5321
Epoch 67/100
81/81 [==============================] - 1s 8ms/step - loss: 25.9765 - mean_squared_error: 25.9765 - val_loss: 1182.9081 - val_mean_squared_error: 1182.9081
Epoch 68/100
81/81 [==============================] - 1s 8ms/step - loss: 25.7663 - mean_squared_error: 25.7663 - val_loss: 999.2703 - val_mean_squared_error: 999.2703
Epoch 69/100
81/81 [==============================] - 1s 8ms/step - loss: 25.1117 - mean_squared_error: 25.1117 - val_loss: 1116.2234 - val_mean_squared_error: 1116.2234
Epoch 70/100
81/81 [==============================] - 1s 8ms/step - loss: 23.9512 - mean_squared_error: 23.9512 - val_loss: 1231.0388 - val_mean_squared_error: 1231.0388
Epoch 71/100
81/81 [==============================] - 1s 8ms/step - loss: 25.2272 - mean_squared_error: 25.2272 - val_loss: 1356.0115 - val_mean_squared_error: 1356.0115
Epoch 72/100
[Epochs 72-99 omitted: training MSE fluctuates between roughly 20.5 and 29.9 while validation MSE stays in the 890-1330 range]
Epoch 100/100
81/81 [==============================] - 1s 8ms/step - loss: 23.2229 - mean_squared_error: 23.2229 - val_loss: 1071.1755 - val_mean_squared_error: 1071.1755
In [423]:
plt.plot(history_nn.history['loss'], label = 'Training Loss')
plt.plot(history_nn.history['val_loss'], label = 'Validation Loss')
plt.legend()
Out[423]:
<matplotlib.legend.Legend at 0x2a2198a30>
In [424]:
ynew_nn_train_no_covid = model_nn.predict(X_train)
ynew_nn_test_no_covid = model_nn.predict(X_test_arr)
In [425]:
model_lstm = Sequential()

model_lstm.add(LSTM(128, input_shape=(X_train.shape[1],1), return_sequences=True))
model_lstm.add(Dropout(0.2))
model_lstm.add(BatchNormalization())  # normalizes the layer's activations, for the same reason the input data is normalized

model_lstm.add(LSTM(128, return_sequences=True))
model_lstm.add(Dropout(0.1))
model_lstm.add(BatchNormalization())

model_lstm.add(LSTM(128))
model_lstm.add(Dropout(0.2))
model_lstm.add(BatchNormalization())

model_lstm.add(Dense(32, activation='relu'))
model_lstm.add(Dropout(0.2))

model_lstm.add(Dense(1, activation='relu'))  # ReLU output keeps the predicted price non-negative

opt = tf.keras.optimizers.Adam(learning_rate=0.001, decay=1e-6)

# Compile model
model_lstm.compile(
    loss='mean_squared_error',
    optimizer=opt,
    metrics=['mean_squared_error']
)
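EarlyStopping is imported at the top of this notebook but never used; a minimal sketch (not part of the recorded run) of how it could rein in the noisy validation curve that follows:

early_stop = EarlyStopping(monitor='val_loss', patience=10,
                           restore_best_weights=True)  # roll back to the best validation epoch

# adding callbacks=[early_stop] to the fit() call below would halt training once
# val_loss stops improving for 10 consecutive epochs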
In [426]:
history1 = model_lstm.fit(
    X_train, y_train,
    batch_size=32,
    epochs=70,
    validation_data=(X_test, y_test),
)
Epoch 1/70
81/81 [==============================] - 12s 105ms/step - loss: 5078.1846 - mean_squared_error: 5078.1846 - val_loss: 1187.8888 - val_mean_squared_error: 1187.8888
Epoch 2/70
81/81 [==============================] - 5s 65ms/step - loss: 2846.5046 - mean_squared_error: 2846.5046 - val_loss: 43.9136 - val_mean_squared_error: 43.9136
[Epochs 3-69 omitted: training MSE plateaus around 900-980 through epoch 33, drops sharply over epochs 34-37 (694.7 → 175.3 → 112.7 → 79.2), then settles between roughly 35 and 68; validation MSE is volatile, swinging between about 40 and 3165]
Epoch 70/70
81/81 [==============================] - 5s 67ms/step - loss: 33.5394 - mean_squared_error: 33.5394 - val_loss: 89.2916 - val_mean_squared_error: 89.2916
In [433]:
plt.plot(history1.history['loss'], label = 'Training Loss')
plt.plot(history1.history['val_loss'], label = 'Validation Loss')
plt.legend()
Out[433]:
<matplotlib.legend.Legend at 0x2a101a760>
In [434]:
ynew1_train_no_covid = model_lstm.predict(X_train)
ynew1_test_no_covid = model_lstm.predict(X_test_arr)
In [435]:
plt.figure(figsize = (12,6), dpi = 400)
plt.plot(y_train, label = 'Actual')
plt.plot(ynew_nn_train_no_covid, label = 'ANN')
plt.plot(ynew1_train_no_covid, label = 'LSTM')
plt.legend()
Out[435]:
<matplotlib.legend.Legend at 0x40f719bb0>
In [438]:
plt.figure(figsize = (12,6), dpi = 600)

plt.plot(y_test, label = 'Actual')
plt.plot(ynew_nn_test_no_covid, label = 'ANN')
plt.plot(ynew1_test_no_covid, label = 'LSTM')
plt.legend()
Out[438]:
<matplotlib.legend.Legend at 0x43407f5e0>
In [ ]:
 
In [129]:
#ANN Network Training
In [132]:
print(pygad.__version__)
2.18.1
In [150]:
%%timeit

function_inputs = [4, -2, 3.5, 5, -11, -4.7]
desired_output = 44


def fitness_func(solution, solution_idx):
    output = np.sum(solution*function_inputs)
    fitness = 1/(np.square(output - desired_output) + 0.000001)
    return fitness

num_generations = 100
num_parents_mating = 10
sol_per_pop = 20

num_genes = len(function_inputs)

ga_instance = pygad.GA(num_generations = num_generations, num_parents_mating= num_parents_mating, fitness_func= fitness_func, 
                      sol_per_pop=sol_per_pop, num_genes=num_genes)

ga_instance.run()
34.9 ms ± 172 µs per loop (mean ± std. dev. of 7 runs, 10 loops each)
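Because %%timeit only benchmarks the cell (names defined under the magic are not kept), inspecting the toy result means re-running the same definitions without it; a minimal sketch:

ga_instance = pygad.GA(num_generations=num_generations, num_parents_mating=num_parents_mating,
                       fitness_func=fitness_func, sol_per_pop=sol_per_pop, num_genes=num_genes)
ga_instance.run()
solution, solution_fitness, solution_idx = ga_instance.best_solution()
print(np.sum(solution * function_inputs))  # should land close to desired_output = 44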
In [152]:
#ANN trained using the genetic algorithm (GA)
In [162]:
keras_ga = pygad.kerasga.KerasGA(model = model_nn, num_solutions= 10)
In [163]:
model_nn.summary()
Model: "sequential_8"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense_16 (Dense)            (None, 128)               10496     
                                                                 
 dropout_23 (Dropout)        (None, 128)               0         
                                                                 
 batch_normalization_18 (Bat  (None, 128)              512       
 chNormalization)                                                
                                                                 
 dense_17 (Dense)            (None, 128)               16512     
                                                                 
 dense_18 (Dense)            (None, 128)               16512     
                                                                 
 dense_19 (Dense)            (None, 1)                 129       
                                                                 
=================================================================
Total params: 44,161
Trainable params: 43,905
Non-trainable params: 256
_________________________________________________________________
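The parameter counts check out by hand: a Dense layer holds (inputs + 1) × units weights, so the first layer over the 81 input features has (81 + 1) × 128 = 10,496 parameters and each 128→128 layer has (128 + 1) × 128 = 16,512. BatchNormalization adds 4 × 128 = 512 parameters, of which the moving mean and variance (2 × 128 = 256) are non-trainable, matching the summary's totals.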
In [401]:
# redefine a smaller ANN for the GA search; note this differs from the architecture
# summarized above, and compiling is unnecessary because the GA sets weights directly
model_nn = Sequential()
model_nn.add(Dense(64, activation = 'relu', input_shape = (X_train.shape[1],)))
model_nn.add(Dense(64, activation = 'relu'))
model_nn.add(Dense(32, activation = 'relu'))
model_nn.add(Dense(1, activation='relu'))

#model_nn.compile(optimizer=opt,  loss='mean_squared_error', metrics='mean_squared_error')
In [403]:
def fitness_func(solution, solution_idx):
    global X_train_arr, y_train, keras_ga, model_nn
    # load the candidate weight vector into the network, then score it by inverse MAE
    model_weights_matrix = pygad.kerasga.model_weights_as_matrix(model = model_nn, weights_vector = solution)
    model_nn.set_weights(weights = model_weights_matrix)
    predictions = model_nn.predict(X_train_arr)
    mae = tf.keras.losses.MeanAbsoluteError()
    solution_fitness = 1/(mae(y_train, predictions).numpy() + 0.00001)  # small epsilon guards against division by zero
    return solution_fitness

weights_vector = pygad.kerasga.model_weights_as_vector(model=model_nn)
keras_ga = pygad.kerasga.KerasGA(model=model_nn,
                                 num_solutions=10 )

def callback_generation(ga_instance):
    print(f'Generation: {ga_instance.generations_completed}')
    print(f'Fitness: {ga_instance.best_solution()[1]}')
    
num_generations = 350
num_parents_mating = 10
initial_population = keras_ga.population_weights
crossover_type = "single_point"
mutation_type = "random"
mutation_percent_genes = 50


ga_instance = pygad.GA(num_generations= num_generations, num_parents_mating=num_parents_mating, 
                       initial_population=initial_population, fitness_func = fitness_func, on_generation= callback_generation,
                       crossover_type=crossover_type, mutation_type=mutation_type, mutation_percent_genes=mutation_percent_genes)
In [404]:
ga_instance.run()
Generation: 1
Fitness: 0.016473667592074862
Generation: 2
Fitness: 0.028408315634407665
Generations 3-350: Fitness: 0.028408315634407665 (no further improvement for the remainder of the run)
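The ANN search stalls almost immediately: the best fitness never improves after generation 2. Since fitness is defined as 1/(MAE + ε), 0.02841 corresponds to a training MAE of roughly 35.2, the same order as the GA-ANN training MAE (≈37.8) computed further below.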
In [405]:
solution_nn, solution_fitness_nn, solution_idx_nn = ga_instance.best_solution()

# load the best evolved weight vector back into the network before predicting
best_solution_weights_nn = pygad.kerasga.model_weights_as_matrix(model=model_nn,
                                                              weights_vector=solution_nn)
model_nn.set_weights(best_solution_weights_nn)
In [406]:
filename = 'genetic_ANN'
ga_instance.save(filename=filename)
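The saved instance can be restored in a later session with pygad's load helper (pygad appends the .pkl extension itself); a minimal sketch:

loaded_ga = pygad.load(filename='genetic_ANN')
print(loaded_ga.best_solution()[1])  # best fitness survives the round trip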
In [407]:
predictions_ga_ann_test = model_nn.predict(X_test_arr)
#print("Predictions : \n", predictions_ga_ann_test)

predictions_ga_ann_train = model_nn.predict(X_train_arr)
#print("Predictions : \n", predictions_ga_ann_train)
In [439]:
plt.figure(figsize = (12,6), dpi = 400)
plt.plot(y_train, label = 'Actual')
plt.plot(ynew_nn_train_no_covid, label = 'ANN')
plt.plot(ynew1_train_no_covid, label = 'LSTM')
plt.plot(predictions_ga_ann_train, label = 'ANN_GA', alpha = 0.2)
plt.legend()
Out[439]:
<matplotlib.legend.Legend at 0x4483a80a0>
In [440]:
plt.figure(figsize = (12,6), dpi = 600)

plt.plot(y_test, label = 'Actual')
plt.plot(ynew_nn_test_no_covid, label = 'ANN')
plt.plot(ynew1_test_no_covid, label = 'LSTM')
plt.plot(predictions_ga_ann_test, label = 'ANN_GA',  marker = '*', alpha  =  0.5)
plt.legend()
Out[440]:
<matplotlib.legend.Legend at 0x511bd7f10>
In [ ]:
 
In [410]:
#LSTM GA implementation
In [411]:
model_lstm = Sequential()

model_lstm.add(LSTM(32, input_shape=(X_train.shape[1],1), return_sequences=True))
model_lstm.add(LSTM(32))
#model_lstm.add(Dropout(0.2))
#model_lstm.add(BatchNormalization())  #normalizes activation outputs, same reason you want to normalize your input data.


model_lstm.add(Dense(32, activation='relu'))


model_lstm.add(Dense(1, activation='relu'))

opt = tf.keras.optimizers.Adam(learning_rate=0.001, decay=1e-6)  # defined but unused here: the GA evolves the weights instead
In [412]:
def fitness_func(solution, solution_idx):
    global X_train_arr, y_train, keras_ga, model_lstm
    # load the candidate weight vector into the LSTM, then score it by inverse MSE
    model_weights_matrix = pygad.kerasga.model_weights_as_matrix(model = model_lstm, weights_vector = solution)
    model_lstm.set_weights(weights = model_weights_matrix)
    predictions = model_lstm.predict(X_train_arr)
    mse = tf.keras.losses.MeanSquaredError()
    solution_fitness = 1/(mse(y_train, predictions).numpy() + 0.00001)  # small epsilon guards against division by zero
    return solution_fitness

weights_vector = pygad.kerasga.model_weights_as_vector(model=model_lstm)
keras_ga = pygad.kerasga.KerasGA(model=model_lstm,
                                 num_solutions=10 )

def callback_generation(ga_instance):
    print(f'Generation: {ga_instance.generations_completed}')
    print(f'Fitness: {ga_instance.best_solution()[1]}')
    
num_generations = 100
num_parents_mating = 8
initial_population = keras_ga.population_weights
crossover_type = "single_point"
mutation_type = "random"
mutation_percent_genes = 20


ga_instance = pygad.GA(num_generations= num_generations, num_parents_mating=num_parents_mating, 
                       initial_population=initial_population, fitness_func = fitness_func, on_generation= callback_generation,
                       crossover_type=crossover_type, mutation_type=mutation_type, mutation_percent_genes=mutation_percent_genes
                      )
In [413]:
ga_instance.run()
Generation: 1
Fitness: 0.0001723779049291833
Generation: 2
Fitness: 0.0001723779049291833
Generation: 3
Fitness: 0.00018587898716946677
Generation: 4
Fitness: 0.00019052725964102373
Generation: 5
Fitness: 0.00019052725964102373
Generation: 6
Fitness: 0.0002005920988938515
Generation: 7
Fitness: 0.00022988198627600241
Generation: 8
Fitness: 0.00024247653732067695
Generation: 9
Fitness: 0.00027249062075031943
Generation: 10
Fitness: 0.0002776528241974532
Generation: 11
Fitness: 0.0002776528241974532
Generation: 12
Fitness: 0.0003798265357837497
Generation: 13
Fitness: 0.0003798265357837497
Generation: 14
Fitness: 0.0006371002936640985
Generation: 15
Fitness: 0.0006371002936640985
Generation: 16
Fitness: 0.0006371002936640985
Generation: 17
Fitness: 0.0006371002936640985
Generation: 18
Fitness: 0.0007489017818459667
Generation: 19
Fitness: 0.001047445217437366
Generation: 20
Fitness: 0.0013898041755620683
Generations 21-30: Fitness: 0.0013898041755620683 (unchanged)
Generation: 31
Fitness: 0.0014958442257637335
Generation: 32
Fitness: 0.0014958442257637335
Generation: 33
Fitness: 0.0014958442257637335
Generation: 34
Fitness: 0.0014958442257637335
Generation: 35
Fitness: 0.0014958442257637335
Generation: 36
Fitness: 0.0014958442257637335
Generation: 37
Fitness: 0.0015872079158074398
Generation: 38
Fitness: 0.0015872079158074398
Generation: 39
Fitness: 0.0015872079158074398
Generation: 40
Fitness: 0.0015872079158074398
Generation: 41
Fitness: 0.0016379760649577891
Generation: 42
Fitness: 0.0016379760649577891
Generation: 43
Fitness: 0.0016379760649577891
Generation: 44
Fitness: 0.0016379760649577891
Generation: 45
Fitness: 0.0016379760649577891
Generation: 46
Fitness: 0.0016801860310453262
Generation: 47
Fitness: 0.0017293495788471502
Generation: 48
Fitness: 0.0017293495788471502
Generation: 49
Fitness: 0.0017293495788471502
Generation: 50
Fitness: 0.0017293495788471502
Generation: 51
Fitness: 0.0017293495788471502
Generation: 52
Fitness: 0.0017329962659838225
Generations 53-100: Fitness: 0.0017329962659838225 (no further improvement for the remainder of the run)
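Unlike the ANN run, this search improves steadily for about 50 generations before plateauing. Inverting the final fitness of 0.001733 gives a training MSE of roughly 577, consistent with the GA-LSTM training MSE (≈590.8) computed below.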
In [414]:
solution_lstm, solution_fitness_lstm, solution_idx_lstm = ga_instance.best_solution()

best_solution_weights_lstm = pygad.kerasga.model_weights_as_matrix(model=model_lstm,
                                                              weights_vector=solution_lstm)
model_lstm.set_weights(best_solution_weights_lstm)
In [415]:
filename = 'genetic_LSTM'
ga_instance.save(filename=filename)
In [416]:
predictions_ga_lstm_test = model_lstm.predict(X_test_arr)


predictions_ga_lstm_train = model_lstm.predict(X_train_arr)
#print("Predictions : \n", predictions_ga_ann_train)
In [ ]:
 
In [441]:
plt.figure(figsize = (12,6), dpi = 400)
plt.plot(y_train, label = 'Actual')
plt.plot(ynew_nn_train_no_covid, label = 'ANN')
plt.plot(ynew1_train_no_covid, label = 'LSTM')
plt.plot(predictions_ga_ann_train, label = 'ANN_GA', alpha = 0.15)
plt.plot(predictions_ga_lstm_train, label = 'LSTM_GA')
plt.legend()
Out[441]:
<matplotlib.legend.Legend at 0x526941be0>
In [450]:
plt.figure(figsize = (12,6), dpi = 600)

plt.plot(y_test, label = 'Actual')
plt.plot(ynew_nn_test_no_covid, label = 'ANN')
plt.plot(ynew1_test_no_covid, label = 'LSTM')
plt.plot(predictions_ga_ann_test, label = 'ANN_GA',  marker = '*', alpha  =  0.5)
plt.plot(predictions_ga_lstm_test, label = 'LSTM_GA', alpha  =  0.5)
plt.legend()
In [448]:
# incomplete cell: the loop body was never written
for i in range(X_train.shape[0] + X_test.shape[0]):
    pass
In [446]:
X_train
Out[446]:
NASDAQCOM DEXJPUS DEXUSEU DEXCHUS DEXUSAL VIXCLS DCOILWTICO GOLD NIFTY50 SNP500 ... 22 23 24 25 26 27 28 29 30 31
0 0.177660 0.824509 0.670630 0.991767 0.590551 0.116007 0.443083 0.036713 0.260304 0.323311 ... 0 0 0 0 0 0 0 0 0 0
1 0.179594 0.827517 0.659627 0.991564 0.585908 0.097323 0.453660 0.030288 0.284918 0.327950 ... 0 0 0 0 0 0 0 0 0 0
2 0.180495 0.831328 0.671162 0.990215 0.589340 0.104992 0.462643 0.040050 0.296526 0.326961 ... 0 0 0 0 0 0 0 0 0 0
3 0.175885 0.842158 0.683762 0.990552 0.599637 0.135806 0.477334 0.045891 0.305003 0.323986 ... 0 0 0 0 0 0 0 0 0 0
4 0.179801 0.837946 0.673114 0.991227 0.599435 0.119771 0.483042 0.046725 0.295184 0.326714 ... 0 0 0 0 0 0 0 0 0 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
2572 0.737215 0.754112 0.192369 0.591983 0.244094 0.100669 0.298019 0.452691 0.820707 0.788804 ... 0 0 0 0 0 0 0 0 0 0
2573 0.718048 0.763137 0.168589 0.617560 0.232788 0.114612 0.285426 0.439341 0.819366 0.776670 ... 0 0 0 0 0 0 0 0 0 0
2574 0.693947 0.767148 0.162023 0.617087 0.231577 0.151701 0.247146 0.421402 0.819091 0.754958 ... 0 0 0 0 0 0 0 0 0 0
2575 0.686404 0.761733 0.166282 0.613983 0.231375 0.168851 0.251595 0.424656 0.818461 0.747356 ... 0 0 0 0 0 0 0 0 0 0
2576 0.700750 0.756919 0.168412 0.605885 0.241470 0.151143 0.254030 0.432290 0.822567 0.757918 ... 0 0 0 0 0 0 0 0 0 0

2577 rows × 81 columns
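The 81 columns break down as 40 scaled market and rolling-mean features, 11 month dummies, and 30 day-of-month dummies (the trailing integer-named columns 22-31 above).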

In [ ]:
 
In [454]:
fred_df= pd.read_csv('fred_df.csv', index_col= 'Unnamed: 0')

fred_df.columns = ['DATE', 'NASDAQCOM', 'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL',
       'VIXCLS', 'DCOILWTICO', 'GOLD', 'NIFTY50', 'SNP500', 'MONTH',
       'DAY']

cols = ['NASDAQCOM', 'DEXJPUS', 'DEXUSEU', 'DEXCHUS', 'DEXUSAL',
       'VIXCLS', 'GOLD', 'NIFTY50', 'SNP500', 'DCOILWTICO']

fred_df.dropna(inplace = True)

# 5-, 10- and 15-day rolling means for each series; the first window-1 rows are NaN and are dropped later
for i in cols:
    col_name_roll = f'{i}_rolling_5'
    col_name_roll1 = f'{i}_rolling_10'
    col_name_roll2 = f'{i}_rolling_15'
    fred_df[col_name_roll] = fred_df[i].rolling(window = ROLLING).mean()
    fred_df[col_name_roll1] = fred_df[i].rolling(window = ROLLING1).mean()
    fred_df[col_name_roll2] = fred_df[i].rolling(window = ROLLING2).mean()
    
months = pd.get_dummies(fred_df['MONTH'], drop_first=True)
months.columns = ['FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']

day = pd.get_dummies(fred_df['DAY'], drop_first=True)

fred_df = pd.concat([fred_df, months, day], axis = 1)
In [462]:
fred_df = fred_df[fred_df['DATE'] <= '2020-3-1']
In [464]:
fred_df['OIL_PRICE_FUTURE'] = fred_df['DCOILWTICO'].shift(-FUTURE)

del fred_df['DAY']
del fred_df['MONTH']

cols = ['NASDAQCOM',
                     'DEXJPUS',               'DEXUSEU',
                     'DEXCHUS',               'DEXUSAL',
                      'VIXCLS',            'DCOILWTICO',
                        'GOLD',               'NIFTY50',
                      'SNP500',   'NASDAQCOM_rolling_5',
        'NASDAQCOM_rolling_10',  'NASDAQCOM_rolling_15',
           'DEXJPUS_rolling_5',    'DEXJPUS_rolling_10',
          'DEXJPUS_rolling_15',     'DEXUSEU_rolling_5',
          'DEXUSEU_rolling_10',    'DEXUSEU_rolling_15',
           'DEXCHUS_rolling_5',    'DEXCHUS_rolling_10',
          'DEXCHUS_rolling_15',     'DEXUSAL_rolling_5',
          'DEXUSAL_rolling_10',    'DEXUSAL_rolling_15',
            'VIXCLS_rolling_5',     'VIXCLS_rolling_10',
           'VIXCLS_rolling_15',        'GOLD_rolling_5',
             'GOLD_rolling_10',       'GOLD_rolling_15',
           'NIFTY50_rolling_5',    'NIFTY50_rolling_10',
          'NIFTY50_rolling_15',      'SNP500_rolling_5',
           'SNP500_rolling_10',     'SNP500_rolling_15',
        'DCOILWTICO_rolling_5', 'DCOILWTICO_rolling_10',
       'DCOILWTICO_rolling_15']

minmaxscaler = MinMaxScaler()

fred_df[cols] = minmaxscaler.fit_transform(fred_df.loc[:, fred_df.columns.isin(cols)])

fred_df.dropna(inplace = True)
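Note that OIL_PRICE_FUTURE is created before the scaling step and is not in cols, so the target remains in dollars per barrel; the MSE and MAE figures reported below are therefore in price units rather than in the [0, 1] scaled space.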
In [526]:
len(y_train)
Out[526]:
2577
In [529]:
#list(y_train).append(list(y_test))  # list.append returns None and would nest y_test; the concatenation in the next cell is used instead
In [539]:
arr = list(y_train)+ list(y_test)
In [572]:
arr_nn = list(ynew_nn_train_no_covid) + list(ynew_nn_test_no_covid)
arr_lstm = list(ynew1_train_no_covid) + list(ynew1_test_no_covid)
arr_ga_nn = list(predictions_ga_ann_train) + list(predictions_ga_ann_test)
arr_ga_lstm = list(predictions_ga_lstm_train) + list(predictions_ga_lstm_test)
In [573]:
# flatten the single-element prediction arrays into plain floats
arr_nn1 = [float(x) for x in arr_nn]
arr_lstm1 = [float(x) for x in arr_lstm]
arr_ga_nn1 = [float(x) for x in arr_ga_nn]
arr_ga_lstm1 = [float(x) for x in arr_ga_lstm]
In [576]:
len(arr_lstm1)
Out[576]:
2863
In [598]:
len(y_train)
Out[598]:
2577
In [577]:
temp = pd.DataFrame(date_arr)
In [578]:
temp['Actual'] = arr
In [579]:
temp['NN'] = arr_nn1
temp['LSTM'] = arr_lstm1
temp['GA_NN'] = arr_ga_nn1
temp['GA_LSTM'] = arr_ga_lstm1
In [612]:
#temp.to_csv('temp.csv')
In [604]:
temp['DATE'][2577]
Out[604]:
Timestamp('2018-08-31 00:00:00')
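Index 2577 is thus the first test-set row: the train/test boundary falls at the end of August 2018, which is what the red vertical line at 2018-08-18 marks in the plots below.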
In [620]:
plt.figure(figsize = (12,6), dpi = 600)

plt.plot( temp['DATE'],temp['Actual'], label = 'Actual')
plt.plot( temp['DATE'],temp['NN'], label = 'ANN')
plt.plot( temp['DATE'],temp['LSTM'], label = 'LSTM')
plt.plot( temp['DATE'],temp['GA_NN'], label = 'GA-ANN', alpha = 0.2)
plt.plot( temp['DATE'],temp['GA_LSTM'], label = 'GA-LSTM', alpha = 1)
plt.vlines(x = datetime(2018,8,18), ymin = 0, ymax= 240, color = 'red', linewidth = 2)  # train/test boundary
#plt.plot(predictions_ga_ann_test, label = 'ANN_GA',  marker = '*', alpha  =  0.5)
#plt.plot(predictions_ga_ann_test, label = 'LSTM_GA', alpha  =  0.5)
plt.legend()
plt.grid(linestyle = '-.', alpha = 0.5)
plt.ylim(0,220)
#plt.savefig('Plot1.jpg', dpi = 800)
plt.show()
In [621]:
plt.figure(figsize = (12,6), dpi = 600)

plt.plot( temp['DATE'],temp['Actual'], label = 'Actual')
#plt.plot( temp['DATE'],temp['NN'], label = 'ANN')
#plt.plot( temp['DATE'],temp['LSTM'], label = 'LSTM')
plt.plot( temp['DATE'],temp['GA_NN'], label = 'GA-ANN', alpha = 0.2)
plt.plot( temp['DATE'],temp['GA_LSTM'], label = 'GA-LSTM', alpha = 1)
plt.vlines(x = datetime(2018,8,18), ymin = 0, ymax= 240, color = 'red', linewidth = 2)
#plt.plot(predictions_ga_ann_test, label = 'ANN_GA',  marker = '*', alpha  =  0.5)
#plt.plot(predictions_ga_ann_test, label = 'LSTM_GA', alpha  =  0.5)
plt.legend()
plt.grid(linestyle = '-.', alpha = 0.5)
plt.ylim(0,220)
#plt.savefig('Plot2.jpg', dpi = 800)
plt.show()
In [622]:
plt.figure(figsize = (12,6), dpi = 600)

plt.plot( temp['DATE'],temp['Actual'], label = 'Actual')
plt.plot( temp['DATE'],temp['NN'], label = 'ANN')
plt.plot( temp['DATE'],temp['LSTM'], label = 'LSTM')
#plt.plot( temp['DATE'],temp['GA_NN'], label = 'GA-ANN', alpha = 0.2)
#plt.plot( temp['DATE'],temp['GA_LSTM'], label = 'GA-LSTM', alpha = 1)
plt.vlines(x = datetime(2018,8,18), ymin = 0, ymax= 240, color = 'red', linewidth = 2)
#plt.plot(predictions_ga_ann_test, label = 'ANN_GA',  marker = '*', alpha  =  0.5)
#plt.plot(predictions_ga_ann_test, label = 'LSTM_GA', alpha  =  0.5)
plt.legend()
plt.grid(linestyle = '-.', alpha = 0.5)
plt.ylim(0,220)
plt.savefig('Plot3.jpg', dpi = 800)
plt.show()
In [654]:
from sklearn.metrics import mean_absolute_error, mean_squared_error, mean_absolute_percentage_error
In [629]:
ann_training = mean_squared_error(temp['Actual'][:2577], temp['NN'][:2577])
lstm_training = mean_squared_error(temp['Actual'][:2577], temp['LSTM'][:2577])
ga_ann_training = mean_squared_error(temp['Actual'][:2577], temp['GA_NN'][:2577])
ga_lstm_training = mean_squared_error(temp['Actual'][:2577], temp['GA_LSTM'][:2577])
In [630]:
ga_lstm_training
Out[630]:
590.7967152384599
In [631]:
ga_ann_training
Out[631]:
2341.6971906865315
In [633]:
lstm_training
Out[633]:
40.25686688210681
In [639]:
ann_training
Out[639]:
2495.1646536565645
In [640]:
ann_test = mean_squared_error(temp['Actual'][2577:], temp['NN'][2577:])
lstm_test = mean_squared_error(temp['Actual'][2577:], temp['LSTM'][2577:])
ga_ann_test = mean_squared_error(temp['Actual'][2577:], temp['GA_NN'][2577:])
ga_lstm_test = mean_squared_error(temp['Actual'][2577:], temp['GA_LSTM'][2577:])
In [641]:
ann_test
Out[641]:
1071.1754678411933
In [642]:
ann_training
Out[642]:
2495.1646536565645
In [643]:
ga_ann_test
Out[643]:
7631.288337675727
In [644]:
ga_lstm_test
Out[644]:
602.2008274671557
In [645]:
ann_training1 = mean_absolute_error(temp['Actual'][:2577], temp['NN'][:2577])
lstm_training1 = mean_absolute_error(temp['Actual'][:2577], temp['LSTM'][:2577])
ga_ann_training1 = mean_absolute_error(temp['Actual'][:2577], temp['GA_NN'][:2577])
ga_lstm_training1 = mean_absolute_error(temp['Actual'][:2577], temp['GA_LSTM'][:2577])

ann_test1 = mean_absolute_error(temp['Actual'][2577:], temp['NN'][2577:])
lstm_test1 = mean_absolute_error(temp['Actual'][2577:], temp['LSTM'][2577:])
ga_ann_test1 = mean_absolute_error(temp['Actual'][2577:], temp['GA_NN'][2577:])
ga_lstm_test1 = mean_absolute_error(temp['Actual'][2577:], temp['GA_LSTM'][2577:])
In [646]:
print(ann_training1)
48.07990701017428
In [647]:
print(lstm_training1)
5.096282185268439
In [648]:
print(ga_ann_training1)
37.83653260459352
In [649]:
print(ga_lstm_training1)
21.046912748005948
In [650]:
print(ann_test1)
32.571596057064895
In [651]:
print(lstm_test1)
8.285258368111991
In [652]:
print(ga_ann_test1)
80.83341812720664
In [658]:
print(ga_lstm_test1)
24.15873193594126
In [655]:
ann_training2 = mean_absolute_percentage_error(temp['Actual'][:2577], temp['NN'][:2577])
lstm_training2 = mean_absolute_percentage_error(temp['Actual'][:2577], temp['LSTM'][:2577])
ga_ann_training2 = mean_absolute_percentage_error(temp['Actual'][:2577], temp['GA_NN'][:2577])
ga_lstm_training2 = mean_absolute_percentage_error(temp['Actual'][:2577], temp['GA_LSTM'][:2577])

ann_test2 = mean_absolute_percentage_error(temp['Actual'][2577:], temp['NN'][2577:])
lstm_test2 = mean_absolute_percentage_error(temp['Actual'][2577:], temp['LSTM'][2577:])
ga_ann_test2 = mean_absolute_percentage_error(temp['Actual'][2577:], temp['GA_NN'][2577:])
ga_lstm_test2 = mean_absolute_percentage_error(temp['Actual'][2577:], temp['GA_LSTM'][2577:])
In [656]:
print(ann_training2)
0.6521149341927295
In [657]:
print(lstm_training2)
0.06604650064386207
In [659]:
print(ga_ann_training2)
0.6535313708592368
In [660]:
print(ga_lstm_training2)
0.33932325551203324
In [661]:
print(ann_test2)
0.5823109981230512
In [662]:
print(lstm_test2)
0.15066860282233266
In [663]:
print(ga_ann_test2)
1.4486507287368866
In [664]:
print(ga_lstm_test2)
0.4389428168841175
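To compare the four models at a glance, the metric variables computed above can be gathered into a single frame; a minimal sketch using only names already defined:

metrics = pd.DataFrame({
    'MSE_train':  [ann_training,  lstm_training,  ga_ann_training,  ga_lstm_training],
    'MSE_test':   [ann_test,      lstm_test,      ga_ann_test,      ga_lstm_test],
    'MAE_train':  [ann_training1, lstm_training1, ga_ann_training1, ga_lstm_training1],
    'MAE_test':   [ann_test1,     lstm_test1,     ga_ann_test1,     ga_lstm_test1],
    'MAPE_train': [ann_training2, lstm_training2, ga_ann_training2, ga_lstm_training2],
    'MAPE_test':  [ann_test2,     lstm_test2,     ga_ann_test2,     ga_lstm_test2],
}, index=['ANN', 'LSTM', 'GA-ANN', 'GA-LSTM'])
print(metrics.round(3))  # the gradient-trained LSTM comes out best on every metric shown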
In [ ]:
 
In [ ]: